/* Neural-network sum-of-squares and Jacobian computation routines. */
#include "Neural Network.h"
#include <math.h>

extern FILE * Jac;

extern NeuralNet * theNet;
extern DTypeVector yData;
extern DTypeMatrix XData;
extern DTypeVector Alpha[];
extern DTypeMatrix Jac_T;
extern DTypeVector Resid;
extern DTypeVector dSquash[];
extern DTypeMatrix Phi, T2;
-
- /***************************************************************************/
- /*-------------------
- Logistic squashing function and its derivative.
- */
- static logisticSquash_dSquash(vector,dvector)
- DTypeVector * vector;
- DTypeVector * dvector;
- {
- register int i;
- register DataType temp;
- register DataType * cell;
- register DataType * dcell;
-
- cell = *vector->cells;
- dcell = *dvector->cells;
-
- for(i=0; i<vector->rows; i++, cell++, dcell++)
- { temp = exp(-*cell);
- *cell = 1/(1+temp);
- *dcell = (*cell)*(1.0-*cell);
- }
- }
-
- /*-------------------
- Compute output for the neural net where the input values are specified by
- the Alpha[0] vector. Also computes derivative of squash function.
- */
- static DataType ComputeOutputJac()
- {
- int i;
-
- for(i=0; i < theNet->OutLayer-1; i++) /* skip the output layer since don't want squash */
- { Matrix_by_Vec(&theNet->W[i],&Alpha[i],&Alpha[i+1]);
- logisticSquash_dSquash(&Alpha[i+1], &dSquash[i+1]); /* logistic squash */
-
- }
- Matrix_by_Vec(&theNet->W[theNet->OutLayer-1], &Alpha[theNet->OutLayer-1], &Alpha[theNet->OutLayer]);
- /* no squash for output layer */
- return(**Alpha[theNet->OutLayer].cells);
- }
-
/*-------------------
	Compute Jacobian for the SS problem where the input values are specified by
	the Alpha[0] vector. Expects activation values for each layer in
	Alpha[i], i=1,..,OutLayer.
	Fills one observation's column of the Jacobian (stored transposed in
	Jac_T) by back-propagating from the output layer toward the input,
	writing parameter derivatives from the last parameter upward.
	NOTE(review): uses Alpha[OL-2], so this assumes OutLayer >= 2 — confirm.
*/
static ComputeJacobian(jcell)
	DataType * jcell;	/* points to last element of ith column of Jacobian matrix */
{
	int i,j,k;
	int OL;
	DataType * acell;	/* pointer to elements of Alpha vector */
	DataType * pcell;	/* pointer to elements of Phi matrix */

	OL = theNet->OutLayer;

	/**** First, get the alpha values just prior to the output layer ****/
	/* Output layer is unsquashed, so d(output)/d(output weight) is just the
	   incoming activation.  Stepping jcell back by Jac_T.cols moves one row
	   up in Jac_T, i.e. one parameter earlier in this observation's column. */
	acell = *Alpha[OL-1].cells + Alpha[OL-1].rows-1;
	for(i=0; i<Alpha[OL-1].rows; i++, acell--, jcell -= Jac_T.cols)
		*jcell = *acell;

	/**** Second, calculate the Phi_OL-1 matrix and use with the second to last layer ****/
	/* Phi = W[OL-1] * diag(dSquash[OL-1]) — the chain-rule factor for the
	   layer feeding the output; here Phi is a row vector. */
	Phi.cols = theNet->W[OL-1].cols;
	Matrix_by_Diag(&theNet->W[OL-1], &dSquash[OL-1], &Phi);
	acell = *Alpha[OL-2].cells + Alpha[OL-2].rows-1;	/* points to the last element of the Alpha[OL-2] vector */
	for(j=0; j<Alpha[OL-2].rows; j++, acell--)
	{	pcell = *Phi.cells + (Phi.cols-1);	/* points to last element of Phi matrix, which is a row vector */
		for(k=0; k<Phi.cols; k++, pcell--, jcell -= Jac_T.cols)
			*jcell = (*acell)*(*pcell);
	}

	/**** Third, calculate Jacobian values for any other layers ****/
	/* Accumulate the chain rule layer by layer: Phi <- (Phi * W[i+1]) *
	   diag(dSquash[i+1]), then each weight's derivative is alpha * phi. */
	for(i=OL-3; i>-1; i--)	/* indexing specifies the Alpha vector */
	{	T2.cols = theNet->W[i+1].cols;
		Matrix_by_Matrix(&Phi, &theNet->W[i+1], &T2);
		Phi.cols = T2.cols;
		Matrix_by_Diag(&T2, &dSquash[i+1], &Phi);
		acell = *Alpha[i].cells + Alpha[i].rows-1;	/* points to the last element of the Alpha[i] vector */
		for(j=0; j<Alpha[i].rows; j++, acell--)
		{	pcell = *Phi.cells + (Phi.cols-1);	/* points to last element of Phi matrix, which is a row vector */
			for(k=0; k<Phi.cols; k++, pcell--, jcell -= Jac_T.cols)
				*jcell = (*acell)*(*pcell);
		}
	}
}
-
/*-------------------
	Compute the sum of squares and Jacobian for net.
	Output layer must be a singleton, so that dependent values can be
	represented as a vector of scalars. Because the SS function is only appropriate
	for scalars, to use vector output variables would need to formulate a multidimensionl
	sum of squares criteria.
	Side effects: fills Resid with the residuals and Jac_T with the
	(transposed) Jacobian; returns the sum of squared residuals.
*/
double
Compute_SS_Jac()
{
	int i;
	DataType * y;		/* pointer to actual dependent value */
	DataType * r;		/* pointer to cells of residual vector */
	DataType * jcell;	/* pointer to cells of Jacobian matrix, stored as transpose */
	double SS;
	DataType * handle;	/* used for pseudo vector */

	/* Lock all relocatable blocks dereferenced below so the Memory Manager
	   cannot move them while we hold raw pointers into them. */
	HLock(Phi.cells);
	HLock(T2.cells);
	HLock_Alpha_dSquash();

	Alpha[0].cells = & handle;	/* setup pseudo handle for use by Alpha[0].cells */
	/* the idea here is to set up a pseudo vector which is a row in the input
	   data matrix and then use this vector as the input vector for the
	   ComputeOutputJac function.
	   NOTE(review): Alpha[0].cells was HLock'ed above and is then replaced
	   here, so HUnlock_Alpha_dSquash below is applied to this fake handle
	   (the address of a stack variable), and Alpha[0].cells is left pointing
	   at a dead stack slot on return — confirm callers reinstall it.
	*/

	y = *yData.cells;
	r = *Resid.cells;

	jcell = *Jac_T.cells + (Jac_T.rows-1)*Jac_T.cols;
	/* points to first element of last row of Jac_T, that is, the last element
	   of first column of Jacobian matrix. Since Jac_T is transpose of
	   Jacobian this means it points to cell for derivative with respect
	   to the last parameter for first observation. By incrementing jcell++
	   at each iteration the pointer specifies the last element in each
	   observation's column.
	*/
	SS = 0.0;
	for(i=0; i < XData.rows; i++, r++, jcell++)
	{
		/* point the pseudo input vector at row i of XData */
		handle = *XData.cells + i*XData.cols;
		*r = y[i] - ComputeOutputJac();
		SS += (double)pow( *r , 2);
		ComputeJacobian(jcell);
	}

	HUnlock(Phi.cells);
	HUnlock(T2.cells);
	HUnlock_Alpha_dSquash();
	return(SS);
}
-
- /*-------------------
- Logistic squashing function and its derivative.
- */
- static logisticSquash(vector)
- DTypeVector * vector;
- {
- register int i;
- register DataType * cell;
-
- cell = *vector->cells;
-
- for(i=0; i<vector->rows; i++, cell++)
- *cell = 1/(1+exp(-*cell));
- }
-
- /*-------------------
- Compute output for the neural net where the input values are specified by
- the Alpha[0] vector.
- */
- static DataType ComputeOutputOnly()
- {
- int i;
-
- for(i=0; i < theNet->OutLayer-1; i++) /* skip the output layer since don't want squash */
- { Matrix_by_Vec(&theNet->W[i],&Alpha[i],&Alpha[i+1]);
- logisticSquash(&Alpha[i+1]); /* logistic squash */
- }
- Matrix_by_Vec(&theNet->W[theNet->OutLayer-1], &Alpha[theNet->OutLayer-1], &Alpha[theNet->OutLayer]);
- /* no squash for output layer */
- return(**Alpha[theNet->OutLayer].cells);
- }
-
- /*-------------------
- Compute the sum of squares
- */
- double
- Compute_SS()
- {
- int i;
- DataType * y; /* pointer to actual dependent value */
- double SS;
- DataType * handle; /* used for pseudo vector */
- double r = 0.0;
-
- HLock_Alpha();
-
- Alpha[0].cells = & handle;
- y = *yData.cells;
- SS = 0.0;
- for(i=0; i < XData.rows; i++)
- {
- handle = *XData.cells + i*XData.cols;
- r = y[i] - ComputeOutputOnly();
- SS += (double)pow( r , 2);
- }
-
- HUnlock_Alpha();
-
- return(SS);
- }
-
- /*-------------------
- */
- static HLock_Alpha_dSquash()
- {
- int i;
-
- for(i=0; i<theNet->OutLayer; i++)
- { HLock(Alpha[i].cells);
- HLock(dSquash[i].cells);
- }
- }
-
- static HUnlock_Alpha_dSquash()
- {
- int i;
-
- for(i=0; i<theNet->OutLayer; i++)
- { HUnlock(Alpha[i].cells);
- HUnlock(dSquash[i].cells);
- }
- }
-
- static HLock_Alpha()
- {
- int i;
-
- for(i=0; i<theNet->OutLayer; i++)
- { HLock(Alpha[i].cells);
- }
- }
-
- static HUnlock_Alpha()
- {
- int i;
-
- for(i=0; i<theNet->OutLayer; i++)
- { HUnlock(Alpha[i].cells);
- }
- }
-
-
-